#[derive(RustcDecodable)]
struct Options {
arg_opts: Option<Vec<String>>,
- flag_package: Vec<String>,
+ flag_package: Option<String>,
flag_jobs: Option<u32>,
flag_features: Vec<String>,
flag_no_default_features: bool,
cargo rustc [options] [--] [<opts>...]
Options:
- -h, --help Print this message
- -p SPEC, --package SPEC ... The profile to compile for
- -j N, --jobs N The number of jobs to run in parallel
- --lib Build only this package's library
- --bin NAME Build only the specified binary
- --example NAME Build only the specified example
- --test NAME Build only the specified test target
- --bench NAME Build only the specified benchmark target
- --release Build artifacts in release mode, with optimizations
- --features FEATURES Features to compile for the package
- --no-default-features Do not compile default features for the package
- --target TRIPLE Target triple which compiles will be for
- --manifest-path PATH Path to the manifest to fetch dependencies for
- -v, --verbose Use verbose output
- -q, --quiet No output printed to stdout
- --color WHEN Coloring: auto, always, never
+ -h, --help Print this message
+ -p SPEC, --package SPEC Package to compile
+ -j N, --jobs N The number of jobs to run in parallel
+ --lib Build only this package's library
+ --bin NAME Build only the specified binary
+ --example NAME Build only the specified example
+ --test NAME Build only the specified test target
+ --bench NAME Build only the specified benchmark target
+ --release Build artifacts in release mode, with optimizations
+ --features FEATURES Features to compile for the package
+ --no-default-features Do not compile default features for the package
+ --target TRIPLE Target triple which compiles will be for
+ --manifest-path PATH Path to the manifest to fetch dependencies for
+ -v, --verbose Use verbose output
+ -q, --quiet No output printed to stdout
+ --color WHEN Coloring: auto, always, never
The specified target for the current package (or package specified by SPEC if
provided) will be compiled along with all of its dependencies. The specified
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
- spec: &options.flag_package,
+ spec: &options.flag_package.map_or(Vec::new(), |s| vec![s]),
exec_engine: None,
mode: ops::CompileMode::Build,
release: options.flag_release,
#[derive(RustcDecodable)]
struct Options {
- flag_package: Option<String>,
+ flag_package: Vec<String>,
flag_aggressive: bool,
flag_precise: Option<String>,
flag_manifest_path: Option<String>,
cargo update [options]
Options:
- -h, --help Print this message
- -p SPEC, --package SPEC Package to update
- --aggressive Force updating all dependencies of <name> as well
- --precise PRECISE Update a single dependency to exactly PRECISE
- --manifest-path PATH Path to the manifest to compile
- -v, --verbose Use verbose output
- -q, --quiet No output printed to stdout
- --color WHEN Coloring: auto, always, never
+ -h, --help Print this message
+ -p SPEC, --package SPEC ... Package to update
+ --aggressive Force updating all dependencies of <name> as well
+ --precise PRECISE Update a single dependency to exactly PRECISE
+ --manifest-path PATH Path to the manifest to compile
+ -v, --verbose Use verbose output
+ -q, --quiet No output printed to stdout
+ --color WHEN Coloring: auto, always, never
This command requires that a `Cargo.lock` already exists as generated by
`cargo build` or related commands.
try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..])));
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
- let spec = options.flag_package.as_ref();
-
let update_opts = ops::UpdateOptions {
aggressive: options.flag_aggressive,
precise: options.flag_precise.as_ref().map(|s| &s[..]),
- to_update: spec.map(|s| &s[..]),
+ to_update: &options.flag_package,
config: config,
};
self.sources
}
- fn ensure_loaded(&mut self, namespace: &SourceId) -> CargoResult<()> {
+ fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> {
match self.source_ids.get(namespace) {
// We've previously loaded this source, and we've already locked it,
// so we're not allowed to change it even if `namespace` has a
}
}
- try!(self.load(namespace, Kind::Normal));
+ try!(self.load(namespace, kind));
Ok(())
}
pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> {
for id in ids.iter() {
- try!(self.load(id, Kind::Locked));
+ try!(self.ensure_loaded(id, Kind::Locked));
}
Ok(())
}
let ret = if overrides.len() == 0 {
// Ensure the requested source_id is loaded
- try!(self.ensure_loaded(dep.source_id()));
+ try!(self.ensure_loaded(dep.source_id(), Kind::Normal));
let mut ret = Vec::new();
for (id, src) in self.sources.sources_mut() {
if id == dep.source_id() {
None => return Err(human("A Cargo.lock must exist before cleaning"))
};
- // Create a compilation context to have access to information like target
+ // Create a compilation context to have access to information like target
// filenames and such
let srcs = SourceMap::new();
let pkgs = PackageSet::new(&[]);
use std::sync::Arc;
use core::registry::PackageRegistry;
-use core::{Source, SourceId, PackageSet, Package, Target, PackageId};
+use core::{Source, SourceId, PackageSet, Package, Target};
use core::{Profile, TargetKind};
use core::resolver::Method;
use ops::{self, BuildOutput, ExecEngine};
compile_pkg(&package, options)
}
+#[allow(deprecated)] // connect => join in 1.3
pub fn compile_pkg<'a>(root_package: &Package,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
- let CompileOptions { config, jobs, target, ref spec, features,
+ let CompileOptions { config, jobs, target, spec, features,
no_default_features, release, mode,
ref filter, ref exec_engine,
ref target_rustc_args } = *options;
return Err(human("jobs must be at least 1"))
}
+ let override_ids = try!(source_ids_from_config(options.config, root_package.root()));
+
let (packages, resolve_with_overrides, sources) = {
- let override_ids =
- try!(source_ids_from_config(options.config, root_package.root()));
let mut registry = PackageRegistry::new(options.config);
- if let Some(source) = source {
- registry.preload(root_package.package_id().source_id(), source);
- } else {
- try!(registry.add_sources(&[root_package.package_id().source_id()
- .clone()]));
- }
// First, resolve the root_package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
let method = Method::Required{
dev_deps: true, // TODO: remove this option?
features: &features,
- uses_default_features: !options.no_default_features,
+ uses_default_features: !no_default_features,
};
let resolved_with_overrides =
try!(ops::resolve_with_previous(&mut registry, root_package, method,
Some(&resolve), None));
- let req: Vec<PackageId> = resolved_with_overrides.iter().map(|r| {
- r.clone()
- }).collect();
- let packages = try!(registry.get(&req).chain_error(|| {
- human("Unable to get packages from source")
- }));
+ let packages = try!(ops::get_resolved_packages(&resolved_with_overrides, &mut registry));
(packages, resolved_with_overrides, registry.move_sources())
};
vec![root_package.package_id()]
};
- /*
if spec.len() > 0 && invalid_spec.len() > 0 {
return Err(human(format!("could not find package matching spec `{}`",
- invalid_spec.join(", "))));
- } */
-
- let to_builds = packages.iter().filter(|p|
- pkgids.iter().find(|&op| *op == p.package_id()).is_some()
- ).collect::<Vec<&Package>>();
-
- let mut twas = &mut vec![];
- let mut package_targets = vec![];
+ invalid_spec.connect(", "))));
+ }
- for &to_build in to_builds.iter() {
- let targets = try!(generate_targets(to_build, mode, filter, release));
+ let to_builds = packages.iter().filter(|p| pkgids.contains(&p.package_id()))
+ .collect::<Vec<_>>();
+
+ let mut general_targets = Vec::new();
+ let mut package_targets = Vec::new();
+
+ match *target_rustc_args {
+ Some(args) => {
+ if to_builds.len() == 1 {
+ let targets = try!(generate_targets(to_builds[0], mode, filter, release));
+ if targets.len() == 1 {
+ let (target, profile) = targets[0];
+ let mut profile = profile.clone();
+ profile.rustc_args = Some(args.to_vec());
+ general_targets.push((target, profile));
+ } else {
+ return Err(human("extra arguments to `rustc` can only be \
+ passed to one target, consider \
+ filtering\nthe package by passing e.g. \
+ `--lib` or `--bin NAME` to specify \
+ a single target"))
- match *target_rustc_args {
- Some(args) if targets.len() == 1 => {
- let (target, profile) = targets[0];
- let mut profile = profile.clone();
- profile.rustc_args = Some(args.to_vec());
- twas.push((target, profile));
+ }
+ } else {
+ panic!("`rustc` should not accept multiple `-p` flags")
}
- Some(_) => {
- return Err(human("extra arguments to `rustc` can only be \
- passed to one target, consider \
- filtering\nthe package by passing e.g. \
- `--lib` or `--bin NAME` to specify \
- a single target"))
+ }
+ None => {
+ for &to_build in to_builds.iter() {
+ let targets = try!(generate_targets(to_build, mode, filter, release));
+ package_targets.push((to_build, targets));
}
- None => package_targets.push((to_build, targets)),
- };
-
- }
+ }
+ };
- for targets in twas {
- let (target, ref profile) = *targets;
+ for &(target, ref profile) in &general_targets {
for &to_build in to_builds.iter() {
package_targets.push((to_build, vec![(target, profile)]));
}
}
-
let mut ret = {
let _p = profile::start("compiling");
let mut build_config = try!(scrape_build_config(config, jobs, target));
try!(ops::compile(manifest_path, &options.compile_opts));
if options.open_result {
- let name = if options.compile_opts.spec.len() > 0{
- // TODO
- try!(PackageIdSpec::parse(options.compile_opts.spec.first()
- .unwrap())).name().replace("-", "_")
- .to_string()
+ let name = if options.compile_opts.spec.len() > 1 {
+ return Err(human("Passing multiple packages and `open` is not \
+ supported"))
+ } else if options.compile_opts.spec.len() == 1 {
+ try!(PackageIdSpec::parse(&options.compile_opts.spec[0]))
+ .name().replace("-", "_").to_string()
} else {
match lib_names.iter().chain(bin_names.iter()).nth(0) {
Some(s) => s.to_string(),
pub struct UpdateOptions<'a> {
pub config: &'a Config,
- pub to_update: Option<&'a str>,
+ pub to_update: &'a [String],
pub precise: Option<&'a str>,
pub aggressive: bool,
}
let mut registry = PackageRegistry::new(opts.config);
let mut to_avoid = HashSet::new();
- match opts.to_update {
- Some(name) => {
+ if opts.to_update.len() == 0 {
+ to_avoid.extend(previous_resolve.iter());
+ } else {
+ let mut sources = Vec::new();
+ for name in opts.to_update {
let dep = try!(previous_resolve.query(name));
if opts.aggressive {
fill_with_deps(&previous_resolve, dep, &mut to_avoid,
&mut HashSet::new());
} else {
to_avoid.insert(dep);
- match opts.precise {
+ sources.push(match opts.precise {
Some(precise) => {
// TODO: see comment in `resolve.rs` as well, but this
// seems like a pretty hokey reason to single out
} else {
precise.to_string()
};
- let precise = dep.source_id().clone()
- .with_precise(Some(precise));
- try!(registry.add_sources(&[precise]));
+ dep.source_id().clone().with_precise(Some(precise))
}
None => {
- let imprecise = dep.source_id().clone()
- .with_precise(None);
- try!(registry.add_sources(&[imprecise]));
+ dep.source_id().clone().with_precise(None)
}
- }
+ });
}
}
- None => to_avoid.extend(previous_resolve.iter()),
+ try!(registry.add_sources(&sources));
}
let resolve = try!(ops::resolve_with_previous(&mut registry,
use super::layout::{Layout, LayoutProxy};
use super::{Kind, Compilation, BuildConfig};
use super::{ProcessEngine, ExecEngine};
+use super::PackagesToBuild;
#[derive(Debug, Clone, Copy)]
pub enum Platform {
/// Prepare this context, ensuring that all filesystem directories are in
/// place.
- pub fn prepare(&mut self, pkg: &'a Package,
- targets: &[(&'a Target, &'a Profile)])
- -> CargoResult<()> {
+ pub fn prepare(&mut self, root: &Package,
+ pkgs: &'a PackagesToBuild<'a>) -> CargoResult<()> {
let _p = profile::start("preparing layout");
try!(self.host.prepare().chain_error(|| {
- internal(format!("couldn't prepare build directories for `{}`",
- pkg.name()))
+ internal(format!("couldn't prepare build directories"))
}));
match self.target {
Some(ref mut target) => {
try!(target.prepare().chain_error(|| {
- internal(format!("couldn't prepare build directories \
- for `{}`", pkg.name()))
+ internal(format!("couldn't prepare build directories"))
}));
}
None => {}
}
- for &(target, profile) in targets {
- self.build_requirements(pkg, target, profile, Kind::from(target));
+ for &(pkg, ref targets) in pkgs {
+ for &(target, profile) in targets {
+ self.build_requirements(pkg, target, profile, Kind::from(target));
+ }
}
let jobs = self.jobs();
self.compilation.extra_env.insert("NUM_JOBS".to_string(),
jobs.to_string());
self.compilation.root_output =
- self.layout(pkg, Kind::Target).proxy().dest().to_path_buf();
+ self.layout(root, Kind::Target).proxy().dest().to_path_buf();
self.compilation.deps_output =
- self.layout(pkg, Kind::Target).proxy().deps().to_path_buf();
+ self.layout(root, Kind::Target).proxy().deps().to_path_buf();
return Ok(());
}
use core::{Package, Target, PackageId, PackageSet, Profile};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile};
+use util::Freshness;
use super::job::Work;
use super::{fingerprint, process, Kind, Context, Platform};
use super::CommandType;
-use util::Freshness;
+use super::PackagesToBuild;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug)]
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
- pkg: &'b Package,
- targets: &[(&'b Target, &'b Profile)]) {
+ pkgs: &'b PackagesToBuild<'b>) {
let mut ret = HashMap::new();
- for &(target, profile) in targets {
- build(&mut ret, Kind::Target, pkg, target, profile, cx);
- build(&mut ret, Kind::Host, pkg, target, profile, cx);
+ for &(pkg, ref targets) in pkgs {
+ for &(target, profile) in targets {
+ build(&mut ret, Kind::Target, pkg, target, profile, cx);
+ build(&mut ret, Kind::Host, pkg, target, profile, cx);
+ }
}
// Make the output a little more deterministic by sorting all dependencies
pub overrides: HashMap<String, BuildOutput>,
}
+pub type PackagesToBuild<'a> = [(&'a Package,Vec<(&'a Target,&'a Profile)>)];
+
// Returns a mapping of the root package plus its immediate dependencies to
// where the compiled libraries are all located.
-#[allow(deprecated)] // connect => join in 1.3
-pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a [(&Package,
- Vec<(&Target,
- &'a Profile)>)],
+pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
deps: &'a PackageSet,
resolve: &'a Resolve,
sources: &'a SourceMap<'cfg>,
profiles: &'a Profiles)
-> CargoResult<Compilation<'cfg>> {
- debug!("compile_targets: {}", pkg_targets.iter().map(|&(ref p, _)| p.name())
- .collect::<Vec<_>>().connect(", "));
-
try!(links::validate(deps));
let dest = if build_config.release {"release"} else {"debug"};
let _p = profile::start("preparing build directories");
// Prep the context's build requirements and see the job graph for all
// packages initially.
- for &(pkg, ref targets) in pkg_targets {
- try!(cx.prepare(pkg, targets));
- prepare_init(&mut cx, pkg, &mut queue, &mut HashSet::new());
- custom_build::build_map(&mut cx, pkg, targets);
+
+
+ try!(cx.prepare(root, pkg_targets));
+ let mut visited = HashSet::new();
+ for &(pkg, _) in pkg_targets {
+ prepare_init(&mut cx, pkg, &mut queue, &mut visited);
}
+ custom_build::build_map(&mut cx, pkg_targets);
}
for &(pkg, ref targets) in pkg_targets {
}
let kind = kind.for_target(target);
- let v =
- try!(cx.target_filenames(pkg, target, profile, kind));
+ let v = try!(cx.target_filenames(pkg, target, profile, kind));
let v = v.into_iter().map(|f| {
(target.clone(), cx.out_dir(pkg, kind, target).join(f))
}).collect::<Vec<_>>();
cx.compilation.tests.push((pkg.clone(), tests));
- if let Some(feats) = cx.resolve.features(pkg.package_id()) {
- cx.compilation.features.extend(feats.iter().cloned());
- }
+ }
+
+ if let Some(feats) = cx.resolve.features(root.package_id()) {
+ cx.compilation.features.extend(feats.iter().cloned());
}
for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() {
});
// Figure out what stage this work will go into
- let stage = match (target.is_lib(),
+ let dst = match (target.is_lib(),
profile.test,
target.is_custom_build()) {
- (_, _, true) => Stage::BuildCustomBuild,
- (true, true, _) => Stage::LibraryTests,
- (false, true, _) => Stage::BinaryTests,
- (true, false, _) => Stage::Libraries,
- (false, false, _) if !target.is_bin() => Stage::BinaryTests,
- (false, false, _) => Stage::Binaries,
+ (_, _, true) => jobs.queue(pkg, Stage::BuildCustomBuild),
+ (true, true, _) => jobs.queue(pkg, Stage::LibraryTests),
+ (false, true, _) => jobs.queue(pkg, Stage::BinaryTests),
+ (true, false, _) => jobs.queue(pkg, Stage::Libraries),
+ (false, false, _) if !target.is_bin() => {
+ jobs.queue(pkg, Stage::BinaryTests)
+ }
+ (false, false, _) => jobs.queue(pkg, Stage::Binaries),
};
- let dst = jobs.queue(pkg, stage);
dst.push((Job::new(dirty, fresh), freshness));
}
options: &TestOptions<'a>)
-> CargoResult<Compilation<'a>> {
let mut compilation = try!(ops::compile(manifest_path, &options.compile_opts));
- compilation.tests.iter_mut()
- .map(|&mut (_, ref mut tests)|
- tests.sort_by(|&(ref n1, _), &(ref n2, _)| n1.cmp(n2)))
- .collect::<Vec<_>>();
+ for tests in compilation.tests.iter_mut() {
+ tests.1.sort();
+ }
+
Ok(compilation)
}
let mut errors = Vec::new();
let config = options.compile_opts.config;
- let mut libs = vec![];
- for package in compilation.to_doc_test.iter() {
- libs.extend(package.targets().iter()
- .filter(|t| t.doctested())
- .map(|t| (package, t.src_path(), t.name(), t.crate_name())));
- }
-
- for (package, lib, name, crate_name) in libs {
- try!(config.shell().status("Doc-tests", name));
- let mut p = try!(compilation.rustdoc_process(package));
- p.arg("--test").arg(lib)
- .arg("--crate-name").arg(&crate_name)
- .cwd(package.root());
-
- for &rust_dep in &[&compilation.deps_output, &compilation.root_output] {
- let mut arg = OsString::from("dependency=");
- arg.push(rust_dep);
- p.arg("-L").arg(arg);
- }
- for native_dep in compilation.native_dirs.values() {
- p.arg("-L").arg(native_dep);
- }
+ let libs = compilation.to_doc_test.iter().map(|package| {
+ (package, package.targets().iter().filter(|t| t.doctested())
+ .map(|t| (t.src_path(), t.name(), t.crate_name())))
+ });
+
+ for (package, tests) in libs {
+ for (lib, name, crate_name) in tests {
+ try!(config.shell().status("Doc-tests", name));
+ let mut p = try!(compilation.rustdoc_process(package));
+ p.arg("--test").arg(lib)
+ .arg("--crate-name").arg(&crate_name)
+ .cwd(package.root());
+
+ for &rust_dep in &[&compilation.deps_output, &compilation.root_output] {
+ let mut arg = OsString::from("dependency=");
+ arg.push(rust_dep);
+ p.arg("-L").arg(arg);
+ }
+ for native_dep in compilation.native_dirs.values() {
+ p.arg("-L").arg(native_dep);
+ }
- if test_args.len() > 0 {
- p.arg("--test-args").arg(&test_args.connect(" "));
- }
+ if test_args.len() > 0 {
+ p.arg("--test-args").arg(&test_args.connect(" "));
+ }
- for feat in compilation.features.iter() {
- p.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
- }
+ for feat in compilation.features.iter() {
+ p.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
+ }
- for (_, libs) in compilation.libraries.iter() {
- for &(ref target, ref lib) in libs.iter() {
- // Note that we can *only* doctest rlib outputs here. A
- // staticlib output cannot be linked by the compiler (it just
- // doesn't do that). A dylib output, however, can be linked by
- // the compiler, but will always fail. Currently all dylibs are
- // built as "static dylibs" where the standard library is
- // statically linked into the dylib. The doc tests fail,
- // however, for now as they try to link the standard library
- // dynamically as well, causing problems. As a result we only
- // pass `--extern` for rlib deps and skip out on all other
- // artifacts.
- if lib.extension() != Some(OsStr::new("rlib")) &&
- !target.for_host() {
- continue
+ for (_, libs) in compilation.libraries.iter() {
+ for &(ref target, ref lib) in libs.iter() {
+ // Note that we can *only* doctest rlib outputs here. A
+ // staticlib output cannot be linked by the compiler (it just
+ // doesn't do that). A dylib output, however, can be linked by
+ // the compiler, but will always fail. Currently all dylibs are
+ // built as "static dylibs" where the standard library is
+ // statically linked into the dylib. The doc tests fail,
+ // however, for now as they try to link the standard library
+ // dynamically as well, causing problems. As a result we only
+ // pass `--extern` for rlib deps and skip out on all other
+ // artifacts.
+ if lib.extension() != Some(OsStr::new("rlib")) &&
+ !target.for_host() {
+ continue
+ }
+ let mut arg = OsString::from(target.crate_name());
+ arg.push("=");
+ arg.push(lib);
+ p.arg("--extern").arg(&arg);
}
- let mut arg = OsString::from(target.crate_name());
- arg.push("=");
- arg.push(lib);
- p.arg("--extern").arg(&arg);
}
- }
- try!(config.shell().verbose(|shell| {
- shell.status("Running", p.to_string())
- }));
- if let Err(e) = ExecEngine::exec(&mut ProcessEngine, p) {
- errors.push(e);
- if !options.no_fail_fast {
- break
+ try!(config.shell().verbose(|shell| {
+ shell.status("Running", p.to_string())
+ }));
+ if let Err(e) = ExecEngine::exec(&mut ProcessEngine, p) {
+ errors.push(e);
+ if !options.no_fail_fast {
+ return Ok(errors);
+ }
}
}
}
assert_that(process(&p.build_dir().join("debug").join("deps").join("d2")).unwrap(),
execs().with_stdout("d2"));
});
+
+// Regression test for multi-spec `-p`/`--package`: a spec that matches no
+// package in the dependency graph must be rejected with a clear error, both
+// when it is the only spec and when it is passed alongside a valid one.
+test!(invalid_spec {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.d1]
+ path = "d1"
+
+ [[bin]]
+ name = "foo"
+ "#)
+ .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+ .file("d1/Cargo.toml", r#"
+ [package]
+ name = "d1"
+ version = "0.0.1"
+ authors = []
+
+ [[bin]]
+ name = "d1"
+ "#)
+ .file("d1/src/lib.rs", "")
+ .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }");
+ p.build();
+
+ // An unknown spec on its own fails with exit code 101.
+ assert_that(p.cargo_process("build").arg("-p").arg("notAValidDep"),
+ execs().with_status(101).with_stderr(
+ "could not find package matching spec `notAValidDep`".to_string()));
+
+ // An unknown spec mixed with a valid one (`d1`) still fails; the valid
+ // spec must not mask the invalid one.
+ assert_that(p.cargo_process("build").arg("-p").arg("d1").arg("-p").arg("notAValidDep"),
+ execs().with_status(101).with_stderr(
+ "could not find package matching spec `notAValidDep`".to_string()));
+
+});
{updating} registry `[..]`
", updating = UPDATING)));
});
+
+// Regression test for `cargo update` accepting multiple `-p` flags: only the
+// named packages are updated on each invocation, updates already applied are
+// not repeated, and a subsequent build picks up all updated versions.
+test!(update_multiple_packages {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dependencies]
+ a = "*"
+ b = "*"
+ c = "*"
+ "#)
+ .file("src/main.rs", "fn main() {}");
+ p.build();
+
+ // Publish 0.1.0 of each dependency and lock to those versions.
+ r::mock_pkg("a", "0.1.0", &[]);
+ r::mock_pkg("b", "0.1.0", &[]);
+ r::mock_pkg("c", "0.1.0", &[]);
+
+ assert_that(p.cargo("fetch"),
+ execs().with_status(0));
+
+ // Publish 0.1.1 of each so an update is available for all three.
+ r::mock_pkg("a", "0.1.1", &[]);
+ r::mock_pkg("b", "0.1.1", &[]);
+ r::mock_pkg("c", "0.1.1", &[]);
+
+ // Updating only `a` and `b` must leave `c` at 0.1.0.
+ assert_that(p.cargo("update").arg("-pa").arg("-pb"),
+ execs().with_status(0)
+ .with_stdout(format!("\
+{updating} registry `[..]`
+{updating} a v0.1.0 (registry [..]) -> v0.1.1
+{updating} b v0.1.0 (registry [..]) -> v0.1.1
+", updating = UPDATING)));
+
+ // `b` is already at 0.1.1, so only `c` is reported as updated here.
+ assert_that(p.cargo("update").arg("-pb").arg("-pc"),
+ execs().with_status(0)
+ .with_stdout(format!("\
+{updating} registry `[..]`
+{updating} c v0.1.0 (registry [..]) -> v0.1.1
+", updating = UPDATING)));
+
+ // The build downloads and compiles the 0.1.1 versions of all three.
+ assert_that(p.cargo("build"),
+ execs().with_status(0)
+ .with_stdout_contains(format!("\
+{downloading} a v0.1.1 (registry file://[..])", downloading = DOWNLOADING))
+ .with_stdout_contains(format!("\
+{downloading} b v0.1.1 (registry file://[..])", downloading = DOWNLOADING))
+ .with_stdout_contains(format!("\
+{downloading} c v0.1.1 (registry file://[..])", downloading = DOWNLOADING))
+ .with_stdout_contains(format!("\
+{compiling} a v0.1.1 (registry [..])", compiling = COMPILING))
+ .with_stdout_contains(format!("\
+{compiling} b v0.1.1 (registry [..])", compiling = COMPILING))
+ .with_stdout_contains(format!("\
+{compiling} c v0.1.1 (registry [..])", compiling = COMPILING))
+ .with_stdout_contains(format!("\
+{compiling} foo v0.5.0 ([..])", compiling = COMPILING)));
+});
use std::path::MAIN_SEPARATOR as SEP;
use support::{execs, project};
use support::{COMPILING, RUNNING};
-use hamcrest::{assert_that, existing_file};
-use cargo::util::process;
+use hamcrest::{assert_that};
fn setup() {
url = foo.url())));
});
-test!(build_multiple_dependencies {
+test!(fail_with_multiple_packages {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
fn main() {}
"#);
foo.build();
+
let bar = project("bar")
.file("Cargo.toml", r#"
[package]
if cfg!(flag = "1") { println!("Yeah from bar!"); }
}
"#);
-
bar.build();
+
let baz = project("baz")
.file("Cargo.toml", r#"
[package]
"#);
baz.build();
- assert_that(foo.cargo_process("rustc").arg("-v").arg("-p").arg("bar")
- .arg("-p").arg("baz").arg("--").arg("--cfg").arg("flag=\"1\""),
- execs()
- .with_status(0));
-
- let bar_bin = &foo.build_dir().join("debug").join("deps").join("bar");
- assert_that(bar_bin, existing_file());
-
- assert_that(
- process(bar_bin).unwrap(),
- execs().with_stdout("Yeah from bar!\n"));
+ assert_that(foo.cargo("rustc").arg("-v").arg("-p").arg("bar")
+ .arg("-p").arg("baz"),
+ execs().with_status(1).with_stderr("\
+Invalid arguments.
- let baz_bin = &foo.build_dir().join("debug").join("deps").join("baz");
- assert_that(bar_bin, existing_file());
- assert_that(
- process(baz_bin).unwrap(),
- execs().with_stdout("Yeah from baz!\n"));
+Usage:
+ cargo rustc [options] [--] [<opts>...]".to_string()));
});